# First we will import the necessary libraries
import os
import pandas as pd
import numpy as np
import math
import datetime as dt
import matplotlib.pyplot as plt
# For evaluation we will use these libraries
from sklearn.metrics import mean_squared_error, mean_absolute_error, explained_variance_score, r2_score
from sklearn.metrics import mean_poisson_deviance, mean_gamma_deviance, accuracy_score
from sklearn.preprocessing import MinMaxScaler
# For model building we will use these library
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.layers import LSTM
# For plotting we will use these libraries
from itertools import cycle
import plotly.graph_objects as go
import plotly.express as px
from plotly.subplots import make_subplots
df = pd.read_csv('btc.csv')
df.shape
(2713, 7)
df.head()
| | Date | Open | High | Low | Close | Adj Close | Volume |
|---|---|---|---|---|---|---|---|
| 0 | 2014-09-17 | 465.864014 | 468.174011 | 452.421997 | 457.334015 | 457.334015 | 21056800 |
| 1 | 2014-09-18 | 456.859985 | 456.859985 | 413.104004 | 424.440002 | 424.440002 | 34483200 |
| 2 | 2014-09-19 | 424.102997 | 427.834991 | 384.532013 | 394.795990 | 394.795990 | 37919700 |
| 3 | 2014-09-20 | 394.673004 | 423.295990 | 389.882996 | 408.903992 | 408.903992 | 36863600 |
| 4 | 2014-09-21 | 408.084991 | 412.425995 | 393.181000 | 398.821014 | 398.821014 | 26580100 |
df.tail()
| | Date | Open | High | Low | Close | Adj Close | Volume |
|---|---|---|---|---|---|---|---|
| 2708 | 2022-02-15 | 42586.464844 | 44667.218750 | 42491.035156 | 44575.203125 | 44575.203125 | 22721659051 |
| 2709 | 2022-02-16 | 44578.277344 | 44578.277344 | 43456.691406 | 43961.859375 | 43961.859375 | 19792547657 |
| 2710 | 2022-02-17 | 43937.070313 | 44132.972656 | 40249.371094 | 40538.011719 | 40538.011719 | 26246662813 |
| 2711 | 2022-02-18 | 40552.132813 | 40929.152344 | 39637.617188 | 40030.976563 | 40030.976563 | 23310007704 |
| 2712 | 2022-02-19 | 40022.132813 | 40246.027344 | 40010.867188 | 40126.429688 | 40126.429688 | 22263900160 |
df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 2713 entries, 0 to 2712
Data columns (total 7 columns):
 #   Column     Non-Null Count  Dtype
---  ------     --------------  -----
 0   Date       2713 non-null   object
 1   Open       2713 non-null   float64
 2   High       2713 non-null   float64
 3   Low        2713 non-null   float64
 4   Close      2713 non-null   float64
 5   Adj Close  2713 non-null   float64
 6   Volume     2713 non-null   int64
dtypes: float64(5), int64(1), object(1)
memory usage: 148.5+ KB
df.describe()
| | Open | High | Low | Close | Adj Close | Volume |
|---|---|---|---|---|---|---|
| count | 2713.000000 | 2713.000000 | 2713.000000 | 2713.000000 | 2713.000000 | 2.713000e+03 |
| mean | 11311.041069 | 11614.292482 | 10975.555057 | 11323.914637 | 11323.914637 | 1.470462e+10 |
| std | 16106.428891 | 16537.390649 | 15608.572560 | 16110.365010 | 16110.365010 | 2.001627e+10 |
| min | 176.897003 | 211.731003 | 171.509995 | 178.102997 | 178.102997 | 5.914570e+06 |
| 25% | 606.396973 | 609.260986 | 604.109985 | 606.718994 | 606.718994 | 7.991080e+07 |
| 50% | 6301.569824 | 6434.617676 | 6214.220215 | 6317.609863 | 6317.609863 | 5.098183e+09 |
| 75% | 10452.399414 | 10762.644531 | 10202.387695 | 10462.259766 | 10462.259766 | 2.456992e+10 |
| max | 67549.734375 | 68789.625000 | 66382.062500 | 67566.828125 | 67566.828125 | 3.509679e+11 |
df.isnull().values.sum()
0
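There are no missing values, but note from df.info() above that Date is still stored as plain strings (dtype object). For time-aware work it is safer to parse it explicitly; a minimal sketch on the same df:

# Parse Date into real timestamps and sanity-check the ordering
df['Date'] = pd.to_datetime(df['Date'])
print(df['Date'].is_monotonic_increasing)   # True when rows are already in date order
print(df['Date'].duplicated().sum())        # 0 when every day appears exactly once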
# Let's first take only the Date and Close price columns
closedf = df[['Date','Close']]
print("Shape of close dataframe:", closedf.shape)
Shape of close dataframe: (2713, 2)
fig = px.line(closedf, x=closedf.Date, y=closedf.Close, labels={'Date': 'Date', 'Close': 'Close Price'})
fig.update_traces(marker_line_width=3, opacity=0.8, marker_line_color='orange')
fig.update_layout(title_text='Bitcoin close price over the whole period, 2014-2022', plot_bgcolor='white',
                  font_size=15, font_color='black')
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
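If Plotly is not available, the Matplotlib import from above can draw the same line; a rough equivalent sketch:

# Fallback: the same close-price chart with matplotlib
plt.figure(figsize=(12, 5))
plt.plot(pd.to_datetime(closedf['Date']), closedf['Close'], color='orange')
plt.title('Bitcoin close price 2014-2022')
plt.xlabel('Date')
plt.ylabel('Close Price')
plt.show()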
# delete the Date column and normalize to [0, 1] with MinMaxScaler
del closedf['Date']
scaler=MinMaxScaler(feature_range=(0,1))
closedf=scaler.fit_transform(np.array(closedf).reshape(-1,1))
closedf.shape
(2713, 1)
# use the first 60% of the series for training and the remaining 40% for testing
training_size = int(len(closedf)*0.60)
test_size = len(closedf) - training_size
train_data, test_data = closedf[:training_size, :], closedf[training_size:, :]
print("train_data: ", train_data.shape)
print("test_data: ", test_data.shape)
train_data:  (1627, 1)
test_data:  (1086, 1)
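One caveat worth flagging: the scaler above was fit on the full series before splitting, so the test period's min and max leak into the training representation. A stricter variant, sketched here but not what this notebook does (leak_free_scaler is a hypothetical name), fits on the training window only:

# Fit MinMaxScaler on the training window only, then apply it to both splits
raw = df[['Close']].values                    # unscaled prices, shape (2713, 1)
split = int(len(raw) * 0.60)
leak_free_scaler = MinMaxScaler(feature_range=(0, 1))
train_scaled = leak_free_scaler.fit_transform(raw[:split])
test_scaled = leak_free_scaler.transform(raw[split:])   # values may fall outside [0, 1]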
# convert an array of values into windows: each row of dataX holds
# `time_step` consecutive values, and dataY holds the value that follows
def create_dataset(dataset, time_step=1):
    dataX, dataY = [], []
    for i in range(len(dataset) - time_step - 1):
        dataX.append(dataset[i:(i + time_step), 0])   # window of length time_step
        dataY.append(dataset[i + time_step, 0])       # next value to predict
    return np.array(dataX), np.array(dataY)
time_step = 20
X_train, y_train = create_dataset(train_data, time_step)
X_test, y_test = create_dataset(test_data, time_step)
print("X_train: ", X_train.shape)
print("y_train: ", y_train.shape)
print("X_test: ", X_test.shape)
print("y_test", y_test.shape)
X_train:  (1606, 20)
y_train:  (1606,)
X_test:  (1065, 20)
y_test (1065,)
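To make the windowing concrete, here is a tiny worked example of create_dataset on a toy series (values chosen only for illustration):

# With time_step=3, each X row is a sliding window and y is the value right after it
toy = np.arange(10, dtype=float).reshape(-1, 1)   # [[0.], [1.], ..., [9.]]
toy_X, toy_y = create_dataset(toy, time_step=3)
print(toy_X[0], toy_y[0])   # [0. 1. 2.] 3.0
print(toy_X.shape)          # (6, 3): len(toy) - time_step - 1 windows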
# reshape input to be [samples, time steps, features] which is required for LSTM
X_train = X_train.reshape(X_train.shape[0], X_train.shape[1], 1)
X_test = X_test.reshape(X_test.shape[0], X_test.shape[1], 1)
print("X_train: ", X_train.shape)
print("X_test: ", X_test.shape)
X_train:  (1606, 20, 1)
X_test:  (1065, 20, 1)
model = Sequential()
model.add(LSTM(10, input_shape=(None, 1), activation="relu"))
model.add(Dense(1))
model.compile(loss="mean_squared_error", optimizer="adam")
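Before training it can help to confirm the model size with model.summary(); for a Keras LSTM layer the parameter count is 4 × (units × (units + input_dim) + units):

# LSTM(10) on 1 input feature: 4 * (10 * (10 + 1) + 10) = 480 parameters,
# plus Dense(1): 10 weights + 1 bias = 11, i.e. 491 trainable parameters in total
model.summary()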
history = model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=150, batch_size=32, verbose=1)
Epoch 1/150
51/51 [==============================] - 2s 11ms/step - loss: 0.0014 - val_loss: 0.0436
Epoch 2/150
51/51 [==============================] - 0s 7ms/step - loss: 3.4160e-04 - val_loss: 0.0136
...
Epoch 17/150
51/51 [==============================] - 0s 5ms/step - loss: 4.3095e-05 - val_loss: 0.0011
...
Epoch 150/150
51/51 [==============================] - 0s 6ms/step - loss: 1.8433e-05 - val_loss: 0.0117
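The log shows the training loss falling throughout while val_loss bottoms out early (around 0.0011) and then drifts upward, a classic overfitting pattern. One standard remedy, sketched here but not part of the original run, is early stopping on val_loss:

from tensorflow.keras.callbacks import EarlyStopping

# Stop once val_loss has not improved for 20 epochs and roll back to the best weights
early_stop = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True)
history = model.fit(X_train, y_train, validation_data=(X_test, y_test),
                    epochs=150, batch_size=32, verbose=1, callbacks=[early_stop])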
### Let's do the prediction and check performance metrics
train_predict = model.predict(X_train)
test_predict = model.predict(X_test)
train_predict.shape, test_predict.shape
51/51 [==============================] - 0s 2ms/step
34/34 [==============================] - 0s 2ms/step
((1606, 1), (1065, 1))
train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
original_ytrain = scaler.inverse_transform(y_train.reshape(-1,1))
original_ytest = scaler.inverse_transform(y_test.reshape(-1,1))
# Evaluation metric: RMSE
print("Train data RMSE: ", math.sqrt(mean_squared_error(original_ytrain,train_predict)))
print("-------------------------------------------------------------------------------------")
print("Test data RMSE: ", math.sqrt(mean_squared_error(original_ytest,test_predict)))
Train data RMSE:  280.9547894583391
-------------------------------------------------------------------------------------
Test data RMSE:  7299.367960666792
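The large gap between train RMSE (~281) and test RMSE (~7299) matches the rising val_loss above: the model fits the low-priced training years far better than the later test period. The metrics imported at the top allow a fuller picture; a short sketch:

# Additional regression metrics on the original (inverse-transformed) price scale
print("Test MAE: ", mean_absolute_error(original_ytest, test_predict))
print("Test R2: ", r2_score(original_ytest, test_predict))
print("Test explained variance: ", explained_variance_score(original_ytest, test_predict))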